Autoregressive Models

Imports

In [1]:
import sys
sys.path.insert(0, '../src/')

import warnings
warnings.filterwarnings('ignore')

%matplotlib inline

from datetime import date
import geopandas as gpd
from IPython.display import display, HTML
import matplotlib.gridspec as gridspec
import matplotlib.pyplot as plt
import numpy as np
import pandas as pd
from pandas.plotting import lag_plot
from pandas.plotting import autocorrelation_plot
from statsmodels.tsa.ar_model import AR
from statsmodels.graphics.tsaplots import plot_acf, plot_pacf
from utils import load_pkl, generate_times
import seaborn as sns; sns.set()
from sklearn.metrics import mean_squared_error
from sklearn.model_selection import StratifiedKFold
from metrics import *

from preprocessing import normalize

from tqdm.autonotebook import tqdm
tqdm.pandas()

# Imports classes
from Baseline import *
from Regressor import *
from utils import *

from IPython.core.interactiveshell import InteractiveShell
InteractiveShell.ast_node_interactivity = "all"

Loading Data

Contour Iris

In [2]:
contour_iris = gpd.read_file(
    '../datasets/iris/iris.shp')

convert_to_int = ['dep', 'insee_com', 'iris', 'code_iris']
for col in convert_to_int:
    contour_iris[col] = contour_iris[col].astype(int)

contour_iris = contour_iris[['code_iris', 'geometry', 'dep']]
contour_iris.head();

Stations and Dates

In [3]:
station_data = pd.read_csv("../datasets/station_to_iris.csv")
station_data.describe();
In [4]:
stations_mode = load_pkl("../datasets/stations_mode.pkl")
subway_stations = [k for k, v in stations_mode.items() if v == 3]
print("Number of Subway stations: {}".format(len(subway_stations)))
Number of Subway stations: 303

Subway stations with fewer than $80000$ validations over the $3$ months. Note that this is before we normalize the data. In the article, the authors removed $3$ subway stations, assuming they were closed for renovation work. Below we print the $4$ stations with the smallest numbers of validations.

In [5]:
station_data[(station_data['id'].isin(subway_stations)) & (station_data['validations_count'] < 80000)];
In [6]:
dates = pd.date_range(start="2015-10-01", end="2015-12-31").date

Discretized Matrix

In [7]:
matrix_6h = np.load("../datasets/6h_matrix.npy")
matrix_2h = np.load("../datasets/2h_matrix.npy")
matrix_15m = np.load("../datasets/15m_matrix.npy")

Data Analysis and Preprocessing

In [8]:
f, ax = plt.subplots(1, figsize=(16, 12))
ax = contour_iris[contour_iris['dep'].isin([75, 92, 93, 94])].plot(
    ax=ax, edgecolor='black', column='dep', cmap='icefire_r')
ax.scatter(station_data[station_data['id'].isin(subway_stations)]['x'],
           station_data[station_data['id'].isin(subway_stations)]['y'], color='firebrick', label='Subway Stations')
ax.set_xlabel('Longitude')
ax.set_ylabel('Latitude')
ax.set_title('Subway Stations in Île-de-France')
ax.legend()

plt.show();

Min Max Normalization

Below we apply min-max normalization to the data, scaling values into the range $[0, 1]$.
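
The normalize function used below comes from the preprocessing module and is not shown in this notebook; a minimal sketch of what a min-max scaler like it could look like (an assumption, not the project's actual implementation):

import numpy as np

def minmax_normalize(matrix):
    # Hypothetical stand-in for preprocessing.normalize:
    # linearly rescale all values into the [0, 1] range.
    matrix = np.asarray(matrix, dtype=float)
    mn, mx = matrix.min(), matrix.max()
    return (matrix - mn) / (mx - mn)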

In [9]:
data_matrix_6h = pd.Panel(normalize(matrix_6h), 
                         items=dates, 
                         major_axis=subway_stations, 
                         minor_axis=generate_times("6h")
                        )

data_matrix_2h = pd.Panel(normalize(matrix_2h), 
                         items=dates, 
                         major_axis=subway_stations, 
                         minor_axis=generate_times("2h")
                        )

data_matrix_15m_complete = pd.Panel(matrix_15m, 
                                    items=dates, 
                                    major_axis=subway_stations, 
                                    minor_axis=generate_times("15min")
                                   )

Delete the first $4$ hours, from 00:00:00 to 04:00:00, because the number of validations in that range is almost always $0$. Each hour spans four $15$-minute columns, hence the del_hours*4 slice below; note that del_hours is set to $0$ here, so nothing is actually removed in this run.

In [10]:
del_hours = 0
In [11]:
data_matrix_15m = data_matrix_15m_complete.iloc[:, :, del_hours*4:]
In [12]:
data_matrix_15m.to_frame().head()
Out[12]:
2015-10-01 2015-10-02 2015-10-03 2015-10-04 2015-10-05 2015-10-06 2015-10-07 2015-10-08 2015-10-09 2015-10-10 ... 2015-12-22 2015-12-23 2015-12-24 2015-12-25 2015-12-26 2015-12-27 2015-12-28 2015-12-29 2015-12-30 2015-12-31
major minor
198 00:00:00 38.0 70.0 57.0 26.0 26.0 39.0 46.0 53.0 46.0 70.0 ... 26.0 20.0 21.0 12.0 35.0 11.0 11.0 27.0 29.0 0.0
00:15:00 20.0 49.0 67.0 13.0 10.0 20.0 31.0 30.0 29.0 84.0 ... 14.0 23.0 21.0 19.0 36.0 14.0 17.0 11.0 18.0 0.0
00:30:00 13.0 39.0 48.0 18.0 4.0 4.0 9.0 26.0 33.0 50.0 ... 13.0 5.0 2.0 11.0 22.0 8.0 13.0 36.0 12.0 0.0
00:45:00 3.0 43.0 61.0 3.0 2.0 6.0 4.0 7.0 33.0 24.0 ... 4.0 5.0 9.0 10.0 6.0 1.0 2.0 2.0 3.0 0.0
01:00:00 1.0 23.0 48.0 0.0 0.0 2.0 3.0 1.0 25.0 25.0 ... 5.0 2.0 5.0 1.0 9.0 2.0 1.0 2.0 2.0 0.0

5 rows × 92 columns

In [13]:
dmatrix_mean_6h = data_matrix_6h.mean()
dmatrix_mean_2h = data_matrix_2h.mean()
dmatrix_mean_15m = data_matrix_15m.mean()

dtmatrix_mean_6h = dmatrix_mean_6h.transpose()
dtmatrix_mean_2h = dmatrix_mean_2h.transpose()
dtmatrix_mean_15m = dmatrix_mean_15m.transpose()

This is another way to list the stations with a small number of validations.

In [14]:
data_matrix_15m.mean(axis=0)[data_matrix_15m.mean(axis=0).sum(axis=1) < 810];
In [15]:
dmatrix_mean_15m.head()
dtmatrix_mean_15m.head()
Out[15]:
2015-10-01 2015-10-02 2015-10-03 2015-10-04 2015-10-05 2015-10-06 2015-10-07 2015-10-08 2015-10-09 2015-10-10 ... 2015-12-22 2015-12-23 2015-12-24 2015-12-25 2015-12-26 2015-12-27 2015-12-28 2015-12-29 2015-12-30 2015-12-31
00:00:00 36.481848 53.389439 75.155116 19.254125 20.247525 27.996700 31.950495 38.940594 55.052805 63.435644 ... 34.145215 29.422442 21.141914 16.544554 29.531353 19.092409 24.069307 30.462046 31.683168 0.026403
00:15:00 28.102310 46.851485 69.412541 14.712871 14.689769 20.224422 24.848185 31.382838 50.775578 60.805281 ... 28.765677 24.570957 20.815182 14.442244 27.132013 18.973597 21.132013 25.815182 27.603960 0.016502
00:30:00 18.356436 37.458746 62.561056 12.412541 9.049505 12.963696 15.481848 19.828383 40.574257 51.204620 ... 20.867987 17.425743 17.937294 11.023102 24.696370 12.749175 15.009901 18.755776 20.019802 0.036304
00:45:00 8.052805 29.128713 52.521452 4.584158 4.171617 5.821782 6.333333 8.603960 30.920792 40.554455 ... 8.716172 7.933993 12.669967 7.858086 17.557756 5.709571 6.425743 8.336634 9.696370 0.013201
01:00:00 2.587459 23.132013 46.643564 1.920792 1.768977 1.943894 2.069307 2.422442 25.405941 35.693069 ... 2.640264 2.557756 8.993399 5.201320 12.785479 2.092409 2.343234 2.570957 3.079208 0.003300

5 rows × 92 columns

Out[15]:
00:00:00 00:15:00 00:30:00 00:45:00 01:00:00 01:15:00 01:30:00 01:45:00 02:00:00 02:15:00 ... 21:30:00 21:45:00 22:00:00 22:15:00 22:30:00 22:45:00 23:00:00 23:15:00 23:30:00 23:45:00
2015-10-01 36.481848 28.102310 18.356436 8.052805 2.587459 0.495050 0.214521 0.165017 0.105611 0.168317 ... 70.927393 66.881188 65.399340 60.584158 60.755776 64.207921 75.128713 64.590759 52.138614 43.818482
2015-10-02 53.389439 46.851485 37.458746 29.128713 23.132013 19.742574 15.320132 6.973597 2.069307 0.491749 ... 78.590759 73.495050 70.907591 65.983498 66.211221 62.917492 66.330033 63.188119 57.755776 54.673267
2015-10-03 75.155116 69.412541 62.561056 52.521452 46.643564 41.693069 37.009901 23.323432 8.752475 2.306931 ... 82.211221 77.181518 73.755776 72.805281 74.570957 76.264026 83.947195 80.049505 76.587459 72.772277
2015-10-04 19.254125 14.712871 12.412541 4.584158 1.920792 0.455446 0.165017 0.059406 0.092409 0.059406 ... 43.570957 43.818482 41.745875 37.462046 36.722772 37.283828 52.759076 37.158416 31.211221 25.254125
2015-10-05 20.247525 14.689769 9.049505 4.171617 1.768977 0.435644 0.227723 0.108911 0.085809 0.099010 ... 61.039604 52.346535 53.125413 46.059406 45.498350 41.036304 41.254125 35.336634 28.003300 22.801980

5 rows × 96 columns

With Outliers

In [16]:
f, ax = plt.subplots(3, figsize=(16, 12))
ax1 = dtmatrix_mean_15m.plot(ax=ax[0], legend=False)
ax1.set_xticklabels([])
ax1.set_ylabel('Number of Validations')
ax1.set_title('15min')

ax2 = dtmatrix_mean_2h.plot(ax=ax[1])
ax2.set_xticklabels([])
ax2.set_ylabel('Number of Validations')
ax2.set_title('2h')
ax2.legend(bbox_to_anchor=(1., 1.01))

ax3 = dtmatrix_mean_6h.plot(ax=ax[2])
ax3.set_xlabel('Days')
ax3.set_ylabel('Number of Validations')
ax3.set_title('6h')
ax3.legend(bbox_to_anchor=(1., 1.01))

plt.xticks(rotation=90)
plt.show();
In [17]:
f, ax = plt.subplots(3, figsize=(16, 12))
ax1 = dtmatrix_mean_15m.plot.area(ax=ax[0], legend=False)
ax1.set_xticklabels([])
ax1.set_ylabel('Number of Validations')
ax1.set_title('15min')

ax2 = dtmatrix_mean_2h.plot.area(ax=ax[1])
ax2.set_xticklabels([])
ax2.set_ylabel('Number of Validations')
ax2.set_title('2h')
ax2.legend(bbox_to_anchor=(1., 1.01))

ax3 = dtmatrix_mean_6h.plot.area(ax=ax[2])
ax3.set_xlabel('Days')
ax3.set_ylabel('Number of Validations')
ax3.set_title('6h')
ax3.legend(bbox_to_anchor=(1., 1.01), loc=2)

plt.xticks(rotation=90)
plt.show();
In [18]:
fig = plt.figure(figsize=(16, 6))
gs = gridspec.GridSpec(1, 1)
ax = fig.add_subplot(gs[0])
dmatrix_mean_15m.plot(ax=ax, legend=False)
plt.ylabel('Number of Validations')
plt.title('15min')

plt.xticks(rotation=90)
plt.show();
In [19]:
f, ax = plt.subplots(3, figsize=(16, 12))
ax1 = dmatrix_mean_15m.iloc[:, :31].plot(ax=ax[0])
ax1.set_xticklabels([])
ax1.set_ylabel('Number of Validations')
ax1.set_title('October\'s number of validations')
ax1.legend(bbox_to_anchor=(1., 0.9, 1.1, .102), ncol=2, loc=2,
           borderaxespad=0.)


ax2 = dmatrix_mean_15m.iloc[:, 31:61].plot(ax=ax[1])
ax2.set_xticklabels([])
ax2.set_ylabel('Number of Validations')
ax2.set_title('November\'s number of validations')
ax2.legend(bbox_to_anchor=(1., 0.9, 1.1, .102), ncol=2, loc=2,
           borderaxespad=0.)

ax3 = dmatrix_mean_15m.iloc[:, 61:].plot(ax=ax[2])
ax3.set_xlabel('Time')
ax3.set_ylabel('Number of Validations')
ax3.set_title('December\'s number of validations')
plt.legend(bbox_to_anchor=(1., 0.9, 1.1, .102), loc=2,
           ncol=2, borderaxespad=0.)

plt.xticks(rotation=90)
plt.tight_layout()

plt.show();
In [20]:
f, ax = plt.subplots(2, figsize=(16, 12))

ax1 = dtmatrix_mean_15m.boxplot(return_type='both', ax=ax[0])
ax[0].set_xlabel("Time", fontsize=15)
ax[0].set_ylabel("Number of Validations", fontsize=15)

for tick in ax[0].get_xticklabels():
    tick.set_rotation(90)

ax2 = dmatrix_mean_15m.boxplot(return_type='both', ax=ax[1])
plt.xticks(rotation=90)

plt.tight_layout()
plt.show();

Defining useful variables

In [21]:
from __init__ import *
In [22]:
wd_15m = data_matrix_15m.loc[dict_w.values()]
wdm_15m = wd_15m.mean()
wdmt_15m = wdm_15m.transpose()

wd_15mf = data_matrix_15m.loc[dict_wd_final.values()]
wdm_15mf = wd_15mf.mean()
wdmt_15mf = wdm_15mf.transpose()

Without outliers

In [23]:
f, ax = plt.subplots(3, figsize=(16, 12))
ax1 = wdm_15m.loc[:, dict_wd_oct.values()].plot(ax=ax[0])
ax1.set_xticklabels([])
ax1.set_ylabel('Number of Validations')
ax1.set_title('October')
ax1.legend(bbox_to_anchor=(1., 0.9, 1.1, .102), ncol=2, loc=2,
           borderaxespad=0.)


ax2 = wdm_15m.loc[:, dict_wd_nov.values()].plot(ax=ax[1])
ax2.set_xticklabels([])
ax2.set_ylabel('Number of Validations')
ax2.set_title('November')
ax2.legend(bbox_to_anchor=(1., 0.9, 1.1, .102), ncol=2, loc=2,
           borderaxespad=0.)

ax3 = wdm_15m.loc[:, dict_wd_dec.values()].plot(ax=ax[2])
ax3.set_xlabel('Time')
ax3.set_ylabel('Number of Validations')
ax3.set_title('December')
plt.legend(bbox_to_anchor=(1., 0.9, 1.1, .102), loc=2,
           ncol=2, borderaxespad=0.)

plt.xticks(rotation=90)
plt.tight_layout()

plt.show();
In [24]:
f, ax = plt.subplots(2, figsize=(16, 8))

ax1 = wdm_15mf.loc[:, dict_wd_novf.values()].plot(ax=ax[0])
ax1.set_xticks([])
ax1.set_ylabel('Number of Validations')
ax1.set_title('November')
ax1.legend(bbox_to_anchor=(1., 0.9, 1.1, .102), ncol=2, loc=2,
           borderaxespad=0.)

ax2 = wdm_15mf.loc[:, dict_wd_decf.values()].plot(ax=ax[1])
ax2.set_xlabel('Time')
ax2.set_ylabel('Number of Validations')
ax2.set_title('December')
plt.legend(bbox_to_anchor=(1., 0.9, 1.1, .102), loc=2,
           ncol=2, borderaxespad=0.)
plt.tight_layout()

plt.show();
In [25]:
f, ax = plt.subplots(2, figsize=(16, 12))

ax1 = wdmt_15mf.boxplot(return_type='both', ax=ax[0])
ax[0].set_xlabel("Time", fontsize=15)
ax[0].set_ylabel("Number of Validations", fontsize=15)

for tick in ax[0].get_xticklabels():
    tick.set_rotation(90)

ax2 = wdm_15mf.boxplot(return_type='both', ax=ax[1])
plt.xticks(rotation=90)

plt.tight_layout()
plt.show();
In [26]:
fig, (ax1, ax2) = plt.subplots(2, figsize=(16, 12))

wdm_15mf.plot(ax=ax1, legend=False)
ax1.set_ylabel('Number of Validations'); ax1.set_title('15min')

ax2 = wdmt_15mf.plot(ax=ax2, legend=False)
ax2.set_ylabel('Number of Validations'); ax2.set_title('15min')

plt.xticks(rotation=90)
plt.tight_layout()

plt.show();

Autocorrelation Plots

In [27]:
fig, (ax1, ax2) = plt.subplots(2, figsize=(16, 12))

autocorrelation_plot(wdmt_15mf.mean(), ax=ax1, c='blue')
ax1.set_title('15min discretization matrix')

plot_acf(wdmt_15mf.mean(), ax=ax2, c='blue', title='Auto Correlation')

plt.show();
In [28]:
plt.figure(figsize=(16, 7))

lag_plot(wdmt_15mf.mean(), c='blue')
plt.title('Lag plot 15min discretization matrix')

# plot_pacf(wdmt_15mf.mean(), ax=ax[1], c='blue', title='Partial Auto Correlation')

plt.show();
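
As a numeric cross-check of the plots above, the lag-$k$ autocorrelation is simply the correlation of the averaged series with a copy of itself shifted by $k$ steps; pandas exposes this directly (a usage sketch, not part of the original analysis):

s = wdmt_15mf.mean()
print("lag-1 autocorrelation: {:.3f}".format(s.autocorr(lag=1)))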

Splitting Data into Train and Test

In [29]:
dico = dict_wd
size = 45
In [30]:
X = data_matrix_15m.loc[dico.values()]
Xm = X.mean()
Xmt = Xm.transpose()
In [31]:
kw = list(dico.keys())
np.random.shuffle(kw)

vw = [dico[i] for i in kw]
In [32]:
ind_train = vw[:size]
ind_test = vw[size:]
X_train = X[ind_train]
X_test = X[ind_test]
In [33]:
X_train
X_test
Out[33]:
<class 'pandas.core.panel.Panel'>
Dimensions: 45 (items) x 303 (major_axis) x 96 (minor_axis)
Items axis: 2015-12-18 to 2015-12-09
Major_axis axis: 198 to 60982
Minor_axis axis: 00:00:00 to 23:45:00
Out[33]:
<class 'pandas.core.panel.Panel'>
Dimensions: 21 (items) x 303 (major_axis) x 96 (minor_axis)
Items axis: 2015-12-03 to 2015-10-19
Major_axis axis: 198 to 60982
Minor_axis axis: 00:00:00 to 23:45:00

Models

Baseline

In [34]:
def baseline_plot_results(levels):
    """
    
    """
    
    baseline_scores = []
    baseline_preds = []
    for level in levels:
        b = Baseline(level=level, first_ndays=5)
        b.fit(X_train)
        baseline_preds.append(b.predict(X_test))
        baseline_scores.append(b.score(X_test))
    
    df_baseline_scores = pd.DataFrame(np.array(baseline_scores).T,
                                 index=['R2', 'RMSE', 'MSE', 'MAE', 'MAPE', 'MPE'],
                                 columns=levels)
    display(HTML(df_baseline_scores.to_html()))
    pd.DataFrame(df_baseline_scores.loc['RMSE'].values.repeat(4).reshape(-1, 4).T,
                 columns=levels).plot(figsize=(16, 4), kind='area');
    
    return df_baseline_scores
In [35]:
levels = ["None", "s", "j", "sj"]
df_baseline_scores = baseline_plot_results(levels)
None s j sj
R2 0.108362 0.930679 0.106301 0.920043
RMSE 336.292884 93.768448 336.681257 100.704955
MSE 113092.903518 8792.521915 113354.268867 10141.487900
MAE 1.951394 0.444717 2.006559 0.483100
MAPE 195.139440 44.471745 200.655931 48.309983
MPE -174.160842 -24.836670 -180.126630 -28.111394
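
For reference, the scores above follow the standard definitions, e.g.

$$\mathrm{RMSE} = \sqrt{\frac{1}{n}\sum_{i=1}^{n}(y_i - \hat y_i)^2}, \qquad \mathrm{MAE} = \frac{1}{n}\sum_{i=1}^{n}\lvert y_i - \hat y_i\rvert, \qquad \mathrm{MAPE} = \frac{100}{n}\sum_{i=1}^{n}\left\lvert\frac{y_i - \hat y_i}{y_i}\right\rvert,$$

with the exact conventions (in particular the MPE sign) defined in metrics.py.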
In [36]:
from cost_functions import mse, mse_g

class myAR(Regressor):
    def __init__(self, order=4, level=None, loss=mse, loss_g=mse_g, max_iter=1000,
                 eps=0.01):
        """ Initialisation des paramètres du perceptron

        :param order: Taille de la fenêtre glissante
        :param loss: fonction de coût
        :param loss_g: gradient de la fonction coût
        :param max_iter: nombre maximum d'itération de la fonction coût
        :param eps: pas du gradient


        """

        self.order = order
        self.max_iter, self.eps = max_iter, eps
        self.loss, self.loss_g = loss, loss_g
        self.w = np.random.random(self.order)
                      
    
    @Regressor.datax_decorator
    def analytic_fit(self, datax):
        """ Finds the optimal weigths analytically 
        
        :param datax: contient tous les exemples du dataset
        :returns: void
        :rtype: None
        
        """
        
        self.X, self.y = datax
        A, B = self.X.T.dot(self.X), self.X.T.dot(self.y)
        self.w = np.linalg.solve(A, B).ravel()
        display(HTML(pd.DataFrame(self.w.reshape(1, -1), index=['Weights'], 
                                  columns=range(1, len(self.w)+1)).to_html()))
       

    def minibatch_fit(self, datax):
        """ Mini-Batch gradient descent Learning

        :param datax: contient tous les exemples du dataset
        
        """

        for _ in range(self.max_iter):
            for d in range(datax.shape[0]):
                for t in range(datax.shape[2] - self.order):
                    batchx = datax.iloc[d, :, t:t + self.order].values
                    batchy = datax.iloc[d, :, t + self.order].values
                    self.w -= (self.eps * self.loss_g(batchx, batchy, self.w))
                   

    def predict(self, datax):
        """ Predict labels

        :param datax: contient tous les exemples du dataset
        :returns: predicted labels
        :rtype: numpy array

        """

        y_pred = []
        for d in range(datax.shape[0]):
            y_pred.append([])
            for t in range(datax.shape[2] - self.order):
                batchx = datax.iloc[d, :, t:t + self.order].values
                y_pred[d].append(batchx.dot(self.w.T))

        return np.array(y_pred).transpose(0, 2, 1)

    def forecast_n(self, datax):
        """ Predict labels

        :param datax: contient tous les exemples du dataset
        :returns: predicted labels
        :rtype: numpy array

        """

        y_pred = []
        for d in range(datax.shape[0]):
            y_pred.append([])
            batchx = datax.iloc[d, :, 0:self.order].values
            for t in range(datax.shape[2] - self.order):
                next_y = batchx.dot(self.w.T)
                y_pred[d].append(next_y)
                batchx = np.hstack(
                    (batchx[:, 1:], np.array(next_y).reshape(-1, 1)))

        return np.array(y_pred).transpose(0, 2, 1)
    
    def transform_batchx(self, batchx, tplus):
        """
        """
        if tplus == 1:
            return batchx
        
        for _ in range(tplus-1):
            next_y = batchx.dot(self.w.T)
            if batchx.ndim == 2:
                batchx = np.hstack((batchx[:, 1:], 
                                    np.array(next_y).reshape(-1, 1)))
            elif batchx.ndim == 1:
                batchx = np.hstack((batchx[1:], next_y))
                
        return batchx
    
    
    def forecast(self, datax, tplus=None):
        """ Predict labels

        :param datax: contient tous les exemples du dataset
        :param tplus: if t equal to 2, means predicting what happened at t+2
        :returns: predicted labels
        :rtype: numpy array
        
        """
                
        if tplus == None or tplus > self.order:
            return self.forecast_n(datax)
        else:
            y_pred = []
            batch_ind = self.order - tplus
            
            if datax.ndim == 3:
                for d in range(datax.shape[0]):
                    y_pred.append([])
                    # Take the first batch
                    batchx = datax.iloc[d, :, 0:self.order].values
                    # Predict till we finish the first round of tplus
                    for _ in range(tplus):
                        next_y = batchx.dot(self.w.T)
                        y_pred[d].append(next_y)
                        batchx = np.hstack((batchx[:, 1:], 
                                            np.array(next_y).reshape(-1, 1)))
                        
                    # After the first round of tplus, we have to replace some
                    # predicted values by the real ones and simultaneously 
                    # replace the following columns by t+1,..., tplus
                    for t in range(1, datax.shape[2] - self.order - tplus + 1): 
                        batchx = self.transform_batchx(
                            datax.iloc[d, :, t:self.order+t].values, tplus)
                        next_y = batchx.dot(self.w.T)
                        # next_y = np.where(next_y < 0, 0, next_y)
                        y_pred[d].append(next_y)
                        
                        
            elif datax.ndim == 2:
                # TODO
                pass
            elif datax.ndim == 1:
                batchx = datax.iloc[0:self.order].values
                
                for _ in range(tplus):
                    next_y = batchx.dot(self.w.T)
                    y_pred.append(next_y)
                    batchx = np.hstack((batchx[1:], next_y))
                        
                for t in range(1, datax.shape[0] - self.order - tplus + 1):
                    batchx = self.transform_batchx(
                            datax.iloc[t:self.order+t].values, tplus)
                    next_y = batchx.dot(self.w.T)
                    # if next_y < 0: next_y = 0
                    y_pred.append(next_y)
                    
                return np.array(y_pred)
            else:
                raise ValueError("Untreated datax number of dimensions")
                                        
        return np.array(y_pred).transpose(0, 2, 1)
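
For reference, analytic_fit above solves the ordinary least-squares normal equations of an AR($p$) model, with $p$ equal to order: each row of $X$ is a sliding window of $p$ consecutive values and $y$ holds the value that immediately follows each window, so

$$\hat{x}_{t} = \sum_{i=1}^{p} w_i\, x_{t-p+i}, \qquad \hat{w} = \arg\min_{w} \lVert Xw - y \rVert^2 = (X^\top X)^{-1} X^\top y,$$

which is exactly what np.linalg.solve(A, B) computes with $A = X^\top X$ and $B = X^\top y$.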
    
In [37]:
class theAR(Baseline):
    def __init__(self, level=None, first_ndays=7, **kwargs):
        """
        
        """
        
        super().__init__(level, first_ndays)
        self.kwargs = kwargs
        
    def fit(self, datax):
        """
        
        """
        
        if self.level is None:
            self.model = myAR(**self.kwargs)
            self.model.analytic_fit(datax)
        elif self.level.lower() == "s":
            self.models = []            
            for s in range(datax.shape[1]):
                Xs = datax.iloc[:, s].T
                self.models.append(myAR(**self.kwargs))
                self.models[s].analytic_fit(Xs)
        elif self.level.lower() == "j":
            # TODO
            self.mean = []
            for d in range(self.first_ndays):
                exist_ind = list(set(ddict_days[d].values()) & set(datax.items))
                self.mean.append(datax[exist_ind].mean().mean(axis=1))                
        elif self.level.lower() == "sj":
            # TODO
            self.mean = []
            for d in range(self.first_ndays):
                exist_ind = list(set(ddict_days[d].values()) & set(datax.items))
                self.mean.append(datax[exist_ind].mean(axis=0))
        else:
            raise ValueError("Unknown value for level attribute, \
            try: s, j, sj or None")
    
    
    def predict(self, datax, tplus=None):
        """
        
        """
        
        def predict_per_station(x, tplus):
            pred_s = []
            for s in range(x.shape[0]):
                pred_s.append(self.models[s].forecast(x.iloc[s], tplus))
            return np.array(pred_s)
        
        if self.level is None:
            return self.model.forecast(datax, tplus)
        elif self.level.lower() == "s":
            return datax.apply(
                lambda x: predict_per_station(x, tplus), axis=(1, 2))
        elif self.level.lower() == "j":
            # TODO
            pass
        elif self.level.lower() == "sj":
            # TODO
            pass
        else:
            raise ValueError("Unknown value for level attribute, \
            try: s, j, sj or None")
    
    
    def score(self, datax, tplus=None):
        """
        
        """
        
        X_pred = self.predict(datax, tplus)
       
        try:
            df_X = datax.iloc[:, :, self.model.order:]
            return super().metrics_score(df_X, X_pred)
        except AttributeError:
            df_X = datax.iloc[:, :, self.models[0].order:]
            return super().metrics_score(df_X, X_pred.values)
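
Before running the full experiments, a minimal sanity check of the recursive forecaster on a synthetic one-dimensional series (a hypothetical usage sketch, not part of the original experiments; it bypasses the decorated analytic_fit and solves the same normal equations by hand):

import numpy as np
import pandas as pd

t = np.arange(200)
series = pd.Series(np.sin(2 * np.pi * t / 96) + 0.05 * np.random.randn(200))

model = myAR(order=16)
# Build the sliding-window design matrix by hand and reuse the same
# normal-equations solution as analytic_fit.
X = np.array([series.values[i:i + 16] for i in range(len(series) - 16)])
y = series.values[16:]
model.w = np.linalg.solve(X.T @ X, X.T @ y)

pred_t2 = model.forecast(series, tplus=2)  # forecasts at horizon t+2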

    
In [38]:
def ar_plot_results(level, order, limit_t):
    """
    
    """
    
    ar_scores = []
    ar_preds = []
    ar = theAR(level=level, order=order)
    print("Fitting...")
    ar.fit(X_train)
    print("Predicting...")
    for t in tqdm(range(1, limit_t+1)):
        ar_preds.append(ar.predict(X_test, t))
        ar_scores.append(ar.score(X_test, t))
        
    print("Scoring...")
    display(HTML((pd.DataFrame(np.array(ar_scores).T, 
                               index=['R2', 'RMSE', 'MSE', 'MAE', 'MAPE', 'MPE'], 
                               columns=list(map(
                                   lambda x: "t+"+str(x),
                                   range(1, len(ar_scores)+1))))).to_html()))
    return ar_scores, ar_preds

def plot_qualitative_analysis(*args):
    """
    
    """
    
    fig, ax = plt.subplots(limit_t+1, figsize=(16, limit_t*4))

    wd_testorder_15m = args[1].iloc[:, :, order:]
    wdm_testorder_15m = wd_testorder_15m.mean()

    wdm_testorder_15m.plot(ax=ax[0])
    ax[0].set_ylabel('Number of Validations')
    ax[0].set_title('Test')
    ax[0].legend(bbox_to_anchor=(1., 0.9, 1.1, .102), ncol=2, loc=2,
               borderaxespad=0.)

    for i in range(1, limit_t+1):
        pred_t = create_panel_pred(*args).mean()
        pred_t.plot(ax=ax[i])
        ax[i].set_ylabel('Number of Validations')
        ax[i].set_title("Predict t+{}".format(i))
        ax[i].legend(bbox_to_anchor=(1., 0.9, 1.1, .102), ncol=2, loc=2,
                     borderaxespad=0.)

    plt.tight_layout()
    plt.show();

Order=16, T=12

One AR for all

In [39]:
order = 16
limit_t = 12
In [40]:
%%time
ar_scores, ar_preds = ar_plot_results(None, order, limit_t)
Fitting...
1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16
Weights 0.032393 -0.010881 -0.012412 -0.048519 0.033852 0.053783 0.006139 -0.073935 0.083525 0.039009 -0.025478 -0.174355 -0.041806 -0.186543 0.269965 1.02947
Predicting...
Scoring...
t+1 t+2 t+3 t+4 t+5 t+6 t+7 t+8 t+9 t+10 t+11 t+12
R2 0.959885 0.917616 0.847844 0.763602 0.654799 0.551067 0.452221 0.368425 0.293396 0.236637 0.193950 0.162264
RMSE 77.056366 110.427725 150.072445 187.058711 226.043809 257.778705 284.747051 305.751763 323.403246 336.141429 345.411993 352.135621
MSE 5937.683518 12194.282388 22521.738692 34990.961185 51095.803382 66449.860794 81080.883283 93484.140620 104589.659451 112991.060350 119309.445030 123999.495440
MAE 0.264305 0.353468 0.441430 0.519554 0.595976 0.654446 0.707719 0.748601 0.785668 0.811570 0.832963 0.851917
MAPE 26.430536 35.346849 44.143016 51.955420 59.597566 65.444588 70.771904 74.860103 78.566784 81.157028 83.296268 85.191684
MPE -0.913098 0.484733 1.602675 2.324734 2.009731 2.114889 1.682568 1.332940 0.515250 0.851296 1.873107 3.252648
CPU times: user 15.8 s, sys: 1.49 s, total: 17.3 s
Wall time: 16.8 s
In [127]:
fig, ax = plt.subplots(1, figsize=(10, 6))
ax.set_prop_cycle(color=['darkgoldenrod', 'brown'])
x = range(1, limit_t+1)
baseline_score = df_baseline_scores.loc['RMSE', 'None'].repeat(limit_t).reshape(-1, limit_t).T
model_score = np.array(ar_scores).T[1]

ax = plt.plot(x, model_score, linewidth=3, label="Full AR")
ax = plt.scatter(x, model_score, marker='*', s=100)
ax = plt.plot(x, baseline_score, linewidth=3, label="Full baseline")
ax = plt.scatter(x, baseline_score, marker='*', s=100)

plt.legend(prop={'size': 20})
plt.title("RMSE of full baseline and AR model, from $t+1$ to $t+{}$".format(limit_t), fontsize=16)
plt.xlabel("T plus", fontsize=16); plt.ylabel("RMSE", fontsize=16);
In [49]:
plot_qualitative_analysis(ar_preds, X_test, i, order, subway_stations, del_hours)

AR per station

In [51]:
order = 16
limit_t = 12
In [54]:
%%time
ar_scores_s, ar_preds_s = ar_plot_results("s", order, limit_t)
Fitting...
1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16
Weights 0.012777 0.003692 0.010484 -0.005068 -0.012645 0.004586 0.010796 0.013117 0.036879 0.017727 -0.000998 -0.135979 -0.217031 0.012857 0.357408 0.850632
1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16
Weights 0.070906 -0.066496 -0.06578 0.029764 0.045726 0.050325 0.020553 -0.008282 -0.000198 0.031239 -0.160143 -0.157955 0.112702 0.115061 0.207067 0.741535
1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16
Weights 0.012232 0.013243 0.050888 -0.076472 -0.006178 -0.037304 0.011978 0.038125 0.102707 0.030967 -0.017053 -0.241636 0.046859 -0.242818 0.153354 1.122444
1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16
Weights 0.068544 -0.044609 0.009225 -0.005992 -0.032572 0.047319 0.007188 -0.021762 0.090877 -0.075277 0.082151 -0.261293 0.017633 -0.103433 0.469447 0.727876
1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16
Weights -0.021177 0.015701 0.11088 -0.125829 0.078172 0.012526 -0.008895 -0.120868 0.114826 -0.015639 -0.121748 -0.195778 0.291674 -0.024102 0.152749 0.846422
1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16
Weights 0.0041 -0.019852 0.066211 -0.020579 0.01537 -0.043082 -0.005779 -0.046017 0.154337 0.004069 0.057059 -0.27962 0.052854 -0.138814 0.071601 1.094179
1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16
Weights -0.007053 -0.007253 -0.013361 0.079003 0.018822 -0.034227 -0.030887 -0.05107 0.158644 -0.0459 0.08723 -0.406109 0.064416 0.015924 0.375758 0.774208
1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16
Weights 0.030446 0.018634 0.00833 -0.066185 -0.046056 0.009814 0.037934 0.050736 0.110357 0.068544 -0.255715 -0.118066 -0.018115 -0.02703 0.084839 1.083139
1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16
Weights 0.057635 -0.062252 0.02025 -0.066776 0.099825 -0.01513 0.008269 -0.085341 0.146146 -0.016089 0.066567 -0.285068 -0.003774 -0.024056 0.039027 1.100759
1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16
Weights 0.020614 -0.002541 0.06552 -0.165189 0.070337 0.071544 -0.016493 -0.106294 0.130304 0.010561 -0.002801 -0.086946 0.014234 -0.208553 -0.159403 1.339137
1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16
Weights 0.038146 -0.045494 0.037711 0.011463 -0.027831 -0.01954 0.052562 0.075511 -0.034518 -0.066787 0.110308 -0.174803 -0.166482 -0.110478 0.67329 0.613238
1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16
Weights 0.030121 -0.01552 0.017302 -0.018858 0.04176 -0.014249 0.009765 -0.09693 0.07592 0.023421 -0.056299 -0.114205 0.10066 -0.090831 0.234921 0.854029
1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16
Weights -0.046009 0.088787 -0.054341 -0.044641 0.13049 0.091332 -0.084295 -0.19343 0.13911 -0.289861 0.274811 -0.112249 0.258543 -0.041611 0.236657 0.61553
1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16
Weights 0.049848 0.009447 -0.002007 -0.035326 0.014657 0.017703 -0.010887 -0.062913 0.021411 0.024613 0.048496 -0.181349 0.068322 -0.185027 0.44071 0.757017
1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16
Weights -0.036826 -0.005316 0.083374 0.042217 -0.037391 -0.018747 -0.027235 -0.142214 0.276592 -0.017866 -0.119705 -0.288272 0.228146 -0.073783 0.252032 0.862302
1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16
Weights -0.031645 0.071568 0.010415 -0.11892 0.099415 -0.015781 0.049437 -0.072287 0.107842 -0.04309 0.003294 -0.235472 0.134193 -0.123792 0.039687 1.092882
1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16
Weights -0.029069 0.048262 0.040451 -0.124878 0.057809 0.006066 0.0256 -0.073542 0.187554 -0.055863 -0.193738 -0.161299 0.303051 -0.021139 0.326405 0.637786
1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16
Weights 0.040217 -0.072344 0.092747 -0.074862 0.054592 -0.010486 -0.000561 -0.062538 0.157169 0.007185 -0.105651 -0.156954 0.05985 -0.124689 0.219014 0.956
1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16
Weights 0.046626 -0.041022 -0.020528 0.001533 0.007983 0.114513 -0.019124 -0.066687 0.120939 -0.009357 -0.151268 -0.231931 0.13447 0.145642 0.319758 0.619679
1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16
Weights 0.032765 0.015139 0.011652 -0.044735 0.05873 0.043193 -0.019663 -0.18239 0.008293 0.008725 -0.00082 -0.142711 0.11835 0.022698 0.163735 0.89086
1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16
Weights -0.035554 0.093072 -0.016227 -0.087451 0.050402 0.057013 0.00273 -0.107281 0.08075 0.041878 0.096046 -0.301497 0.190365 -0.343288 0.135255 1.127156
1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16
Weights 0.007389 -0.004141 0.097077 -0.097039 -0.003072 -0.015041 -0.007323 -0.001625 0.183706 -0.01802 -0.048028 -0.198845 -0.047467 -0.071281 0.207393 0.992191
1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16
Weights 0.056408 -0.036406 0.004903 -0.067228 0.050673 0.009987 0.001846 -0.016003 0.179273 -0.013771 -0.152004 -0.17257 0.06137 -0.174578 0.303184 0.938057
1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16
Weights 0.005674 0.029895 0.016149 -0.109594 0.088228 -0.000153 -0.001834 -0.080001 0.110229 0.096815 -0.096514 -0.107751 0.062543 -0.322083 0.120392 1.154194
1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16
Weights 0.028028 -0.010064 -0.006487 -0.046932 0.009397 0.056158 0.027844 -0.054789 0.074928 -0.012431 -0.036774 -0.034377 -0.113278 -0.141246 0.055814 1.17423
1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16
Weights 0.03007 -0.013158 0.025247 -0.021214 -0.015025 -0.011215 0.046371 0.032294 0.07878 -0.052519 -0.145824 -0.299852 0.30266 -0.159519 0.402021 0.772909
1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16
Weights 0.010698 -0.032275 0.008529 -0.041922 0.11008 0.064229 -0.024789 -0.11526 0.160209 -0.134998 -0.078008 -0.320671 0.267609 0.030062 0.274103 0.80255
1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16
Weights 0.03446 -0.040052 -0.063707 0.043887 0.050074 0.053302 -0.051168 0.022485 0.06756 -0.103279 -0.080904 -0.090343 -0.067133 0.07001 0.312361 0.822988
1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16
Weights 0.063708 -0.091718 -0.029587 0.011816 0.107275 0.039437 -0.039407 -0.047238 0.071725 -0.058235 -0.066629 -0.17617 0.221189 -0.173454 0.377489 0.771083
1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16
Weights 0.047944 -0.039757 0.00246 -0.005799 0.017022 0.049592 -0.028172 0.023145 0.040128 -0.024556 -0.052835 -0.295347 0.177739 0.044727 0.25884 0.75229
1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16
Weights 0.052139 -0.02889 -0.00899 -0.111544 0.0853 0.133379 0.004564 -0.156973 0.040882 -0.07515 0.213009 -0.313093 0.19635 -0.218242 0.173484 0.987946
1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16
Weights 0.068616 -0.078339 -0.087718 0.013178 0.109397 0.076024 0.009557 -0.115456 0.045141 0.052117 -0.026629 -0.27145 0.103088 -0.275196 0.384978 0.967369
1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16
Weights 0.051152 -0.100634 0.123789 -0.124601 0.13273 -0.101197 0.042778 0.022152 -0.009495 -0.018103 0.186111 -0.243603 0.007395 -0.251567 0.115879 1.143247
1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16
Weights 0.097241 -0.068903 -0.034724 -0.024847 0.050739 -0.001446 0.003286 -0.038721 0.176135 0.006639 -0.084119 -0.337154 0.118218 -0.025487 0.199531 0.94038
1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16
Weights 0.02445 0.021124 -0.01948 -0.082149 0.080338 0.033347 -0.015806 -0.050208 0.049355 0.01737 0.031504 -0.234898 0.101677 -0.166971 0.255188 0.909908
1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16
Weights -0.017095 0.04531 -0.009097 0.026996 0.018379 -0.009098 0.019686 -0.070744 0.087364 0.003149 -0.166778 -0.290124 0.397125 0.122932 0.092633 0.708568
1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16
Weights -0.058909 0.015053 -0.034612 0.140665 0.054225 -0.027761 0.02034 -0.121091 0.014107 0.062496 -0.040929 -0.406407 0.293994 -0.006351 0.234922 0.844797
1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16
Weights 0.035888 -0.059511 0.04049 -0.076603 0.139841 -0.040957 0.019943 -0.080335 0.120947 -0.009647 -0.079232 -0.210823 0.108767 0.061863 0.168599 0.831269
1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16
Weights 0.034163 -0.010602 0.052965 -0.107149 0.047189 -0.0095 0.029659 -0.019786 0.139373 -0.002636 -0.079067 -0.211796 0.021504 -0.045295 0.255511 0.889993
1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16
Weights 0.03298 -0.087728 -0.00644 0.081439 0.015747 0.008851 0.070561 -0.154496 0.027037 0.141809 -0.008201 -0.413844 0.329484 -0.311763 0.213403 1.048205
1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16
Weights 0.024628 -0.10759 0.031439 0.020438 0.058861 0.137862 -0.122183 -0.113457 0.157963 0.009877 -0.018162 -0.356179 0.216575 -0.229303 0.231096 1.031937
1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16
Weights 0.010669 0.003477 -0.010196 0.004442 0.110149 -0.043125 -0.000173 -0.080541 0.066318 -0.108474 -0.12052 -0.086278 0.145504 0.109099 0.400658 0.575467
1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16
Weights 0.078358 -0.199935 0.202154 -0.009641 0.104608 -0.152976 -0.044807 -0.120002 -0.284252 0.405395 0.059907 -0.09443 0.201706 0.072036 0.052569 0.689025
1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16
Weights 0.011024 -0.034978 -0.011542 0.011112 0.131294 0.016397 -0.04942 -0.052366 0.01662 -0.05313 -0.032698 -0.23793 0.077927 0.181161 0.273024 0.726374
1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16
Weights 0.060111 -0.075221 0.005515 -0.009682 0.031793 0.002126 0.051555 0.043483 -0.027963 -0.047849 -0.002948 -0.209815 0.039978 0.018057 0.208049 0.874693
1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16
Weights 0.00544 0.006257 -0.002617 -0.046143 0.045586 0.016492 0.018657 0.000959 0.084438 -0.056917 -0.084977 -0.167401 0.030196 -0.038546 0.280844 0.883927
1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16
Weights 0.058577 -0.071806 0.077125 -0.127086 0.067367 0.077172 -0.00896 -0.127994 0.040931 0.09047 0.141353 -0.234288 0.040462 -0.446221 0.259462 1.146601
1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16
Weights -0.03663 0.138175 -0.046926 -0.08458 0.065514 -0.050321 0.069496 -0.073965 0.175087 -0.054505 -0.092391 -0.132528 0.064827 -0.022354 0.216872 0.839408
1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16
Weights -0.026391 0.054214 0.066113 -0.18026 0.122747 -0.003053 -0.00938 -0.077131 0.085764 0.051678 0.077578 -0.315867 0.110762 -0.17367 0.080654 1.121716
1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16
Weights 0.043625 -0.011597 -0.05018 0.013248 0.00327 0.029194 0.04262 -0.044813 0.061154 -0.010309 -0.020218 -0.15878 -0.077702 -0.110902 0.326732 0.940135
1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16
Weights 0.020328 -0.028269 -0.002506 -0.04527 0.172563 -0.048881 -0.012295 -0.134898 0.09661 0.001286 -0.096477 -0.250951 0.367263 0.12864 0.148209 0.651405
1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16
Weights 0.00458 0.008326 0.027053 -0.08848 0.067816 0.01072 -0.049812 0.054086 0.083126 -0.052342 0.0047 -0.348005 0.266045 -0.164544 0.075832 1.066478
1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16
Weights 0.041994 -0.026836 0.007557 -0.035393 0.030309 0.04948 0.00437 -0.053111 0.120778 -0.076402 -0.043029 -0.192543 0.088358 -0.037333 0.386632 0.708089
1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16
Weights 0.005404 -0.050156 -0.007511 0.059409 0.099323 0.040413 -0.026486 -0.038734 -0.191325 -0.02056 0.031652 -0.057945 0.078603 -0.006724 0.219635 0.846337
1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16
Weights 0.035941 -0.060691 -0.003575 -0.001503 0.052957 0.041296 0.060955 -0.027829 -0.025006 0.061541 -0.143714 -0.422298 0.500487 -0.251043 0.202296 0.96039
1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16
Weights 0.045561 0.040327 -0.049568 -0.040728 0.014045 0.010492 -0.011196 0.054925 -0.016314 -0.037079 -0.055262 -0.08277 -0.106775 0.151671 0.435403 0.612594
1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16
Weights 0.022475 0.024616 0.09709 -0.080242 -0.075182 0.054471 -0.114776 0.172928 0.058654 -0.024654 -0.148034 -0.220181 0.17297 0.307619 0.085594 0.612886
1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16
Weights 0.021436 0.023722 -0.030629 -0.093931 0.062134 0.071672 0.022841 -0.082826 0.071282 0.015737 -0.076325 -0.177544 0.09721 -0.133485 0.044876 1.148346
1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16
Weights 0.01249 -0.041092 0.042078 0.050053 0.021804 -0.023427 -0.016151 -0.08358 0.121428 -0.064015 -0.037371 -0.132041 0.209803 -0.043846 0.175735 0.771091
1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16
Weights 0.022389 0.003826 -0.051792 0.055939 -0.00056 -0.031768 0.003242 0.010442 0.017434 0.019077 -0.008088 0.029267 -0.067566 -0.263632 0.03525 1.18399
1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16
Weights 0.000471 -0.032818 0.035597 0.019247 0.034345 0.021859 0.029782 -0.08502 0.054482 -0.074614 -0.055261 -0.111956 0.090929 -0.042583 0.322006 0.761582
1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16
Weights 0.042323 -0.033115 0.071846 -0.148857 0.025991 0.047828 0.039062 -0.115269 0.283165 -0.050206 -0.092008 -0.376262 0.205369 -0.050489 0.048313 1.083156
1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16
Weights 0.026808 -0.002245 0.022277 -0.008939 0.025819 -0.028628 -0.036109 0.14451 -0.053697 -0.098113 -0.139897 -0.13775 0.287604 0.18866 0.137631 0.629043
1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16
Weights 0.011515 0.042124 0.024578 -0.000755 0.030426 -0.060736 -0.161164 -0.096895 0.23612 -0.038611 0.021149 -0.251918 0.302711 -0.013615 0.241056 0.684616
1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16
Weights 0.047074 -0.005885 -0.037039 -0.033899 0.026002 0.032328 0.050044 -0.019004 0.043109 -0.030036 -0.128238 -0.203995 0.007569 0.150629 0.346179 0.727384
1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16
Weights -0.01133 0.018072 0.101041 -0.123986 0.116938 -0.009546 -0.312136 0.149556 0.088258 0.0475 0.060484 -0.2918 0.237948 0.021955 -0.043666 0.90574
1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16
Weights -0.024037 0.065234 0.009815 -0.065795 0.029599 0.007777 0.011154 -0.142247 0.190587 0.065701 -0.018798 -0.251279 0.082484 -0.192951 0.015588 1.185993
1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16
Weights 0.078433 -0.019103 -0.038677 -0.064092 0.052658 -0.000775 0.013423 0.055429 0.072489 -0.073593 -0.014974 -0.12217 -0.123879 -0.048471 0.377741 0.817215
1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16
Weights 0.031976 -0.058641 0.048078 -0.030914 0.011354 0.010481 0.047728 -0.172707 0.18195 0.108055 -0.023838 -0.363874 0.155206 -0.175342 -0.073621 1.279914
1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16
Weights -0.022588 -0.06314 0.152244 -0.048477 0.052538 -0.088727 0.031974 -0.126971 0.227204 0.139709 -0.252338 -0.321218 0.301636 -0.019045 0.148696 0.859772
1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16
Weights 0.013614 0.045375 -0.026451 -0.11454 0.143333 -0.041621 0.03704 -0.114032 0.117004 -0.035847 0.156325 -0.197537 0.015577 -0.190895 -0.100907 1.260158
1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16
Weights 0.044319 0.005917 -0.057334 0.01864 -0.010006 0.015491 0.039896 0.042104 0.010366 -0.120434 -0.090571 -0.037765 -0.016035 0.10568 0.367326 0.648441
1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16
Weights 0.031425 -0.013703 -0.033584 -0.00344 0.020801 0.018669 -0.00073 0.0284 0.08981 -0.022096 -0.067872 -0.298034 0.013955 -0.03333 0.462099 0.774002
1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16
Weights -0.024466 -0.02328 0.093602 -0.009173 0.077246 -0.063465 -0.126341 -0.038121 0.190794 0.054416 0.038763 -0.350384 0.08849 -0.022847 -0.089933 1.178287
1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16
Weights 0.013118 0.007118 -0.009846 -0.007287 -0.008082 -0.00804 0.036293 0.082575 -0.053404 0.011225 -0.073918 -0.113194 -0.002758 -0.06545 0.344063 0.748191
1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16
Weights -0.009463 -0.03094 0.058173 0.014628 0.03334 0.035632 0.035935 -0.10198 0.013098 -0.066909 -0.0499 -0.109823 0.133288 0.050965 0.353142 0.625548
1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16
Weights 0.020888 0.010963 0.057921 0.032636 0.07473 0.024191 -0.010484 -0.043722 -0.128365 -0.088561 -0.028723 -0.07419 0.482871 0.111633 0.063942 0.449102
1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16
Weights 0.013655 -0.055063 0.051368 0.034221 -0.022909 -0.010178 0.019491 -0.002404 0.079698 -0.007274 -0.038284 -0.448265 0.21501 -0.065576 0.469227 0.738744
1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16
Weights 0.010764 0.041424 -0.062472 -0.040311 0.066868 0.054921 0.020939 -0.022292 -0.024295 -0.052081 -0.032351 -0.159744 -0.012613 -0.03034 0.317454 0.905755
1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16
Weights -0.015416 0.080196 -0.064014 0.014087 0.03019 -0.043313 0.000375 -0.029205 0.102476 0.022117 0.048658 -0.220702 -0.077046 -0.119132 0.191365 1.05059
1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16
Weights -0.021403 0.025286 0.054926 -0.02876 0.033237 -0.037342 0.029336 -0.002367 0.033815 -0.04825 0.022475 -0.166559 0.068525 0.049033 0.246185 0.699558
1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16
Weights 0.022248 0.00504 -0.017632 -0.020299 0.054892 -0.023758 -0.010753 0.003934 0.010789 0.086556 -0.051312 -0.149744 -0.075766 -0.047122 0.312347 0.876945
1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16
Weights 0.029184 -0.014233 0.018276 -0.032182 0.008815 0.025397 -0.01532 -0.063131 0.126037 -0.003507 0.095905 -0.261932 0.098287 -0.365109 0.297197 1.02855
1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16
Weights 0.084061 -0.167228 0.111619 -0.053232 0.08955 -0.03625 -0.007961 -0.046162 0.083335 0.055331 0.055917 -0.349606 0.131639 -0.020226 -0.126253 1.173806
1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16
Weights 0.096594 0.030835 -0.114115 -0.384179 0.385037 0.025209 0.072412 -0.065028 -0.038432 -0.028719 0.079259 -0.097308 0.039847 -0.021344 0.061684 0.921784
1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16
Weights 0.023298 0.027501 0.003735 -0.107803 0.030068 -0.02599 0.062935 0.051586 0.071906 0.01414 -0.09321 -0.255917 0.042822 -0.081977 0.14102 1.065995
1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16
Weights 0.043294 -0.057592 0.058901 -0.051422 -0.00196 0.049122 -0.01317 -0.005244 0.028077 -0.00044 0.047242 -0.117024 0.040331 -0.335854 0.204849 1.072338
1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16
Weights 0.029536 0.046266 -0.046553 -0.125143 0.034277 0.131674 -0.039501 -0.039024 0.147352 0.005788 -0.104731 -0.260878 -0.010141 -0.001662 0.302292 0.909032
1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16
Weights 0.045117 -0.033591 0.002121 -0.096616 0.132386 -0.001056 0.047558 -0.043626 0.045322 -0.058529 0.032025 -0.230674 0.045596 -0.015972 0.25601 0.844039
1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16
Weights 0.043723 -0.076781 0.060925 -0.009646 -0.002075 0.000471 0.002067 -0.027238 0.08152 0.035794 0.006629 -0.199096 -0.036107 -0.179603 0.265697 1.004094
1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16
Weights -0.011824 0.046009 0.017206 0.005733 0.0091 0.023301 -0.023355 -0.045686 0.009949 -0.010975 -0.032047 0.025566 0.079678 0.162631 0.232552 0.455739
1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16
Weights 0.002402 -0.023094 0.018377 -0.018068 0.0694 -0.051267 0.078271 -0.00813 0.032363 -0.047203 -0.043779 -0.147027 0.068551 -0.047194 0.350103 0.730648
1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16
Weights 0.040878 -0.056853 0.072516 -0.115349 0.087388 0.04106 -0.079587 -0.07863 0.171982 0.059094 -0.019051 -0.218024 0.122122 -0.345324 0.072668 1.224389
1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16
Weights 0.017805 -0.030216 0.055578 -0.044378 0.058095 -0.026066 -0.034432 -0.039198 0.000891 0.179306 0.131293 -0.309626 0.0188 -0.403433 0.31188 1.089411
1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16
Weights 0.032146 0.008194 -0.020964 -0.04341 0.031422 -0.015691 0.063315 0.02505 0.007939 -0.024565 -0.031503 -0.113637 -0.089746 -0.040644 0.270535 0.905721
1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16
Weights -0.048221 0.077508 0.012188 -0.13086 0.210513 -0.002692 0.041788 -0.25242 0.106141 -0.056241 -0.02481 -0.189534 0.159427 -0.065251 0.251977 0.887056
1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16
Weights 0.088029 -0.149264 0.045698 -0.044568 0.088718 0.090783 -0.002618 -0.144989 -0.00647 0.134054 0.045273 -0.197954 -0.003219 -0.237935 0.352764 0.910289
1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16
Weights 0.058718 -0.042597 -0.022242 0.014692 0.031213 -0.008169 0.024799 -0.058802 0.111278 0.013104 -0.054742 -0.259773 0.157013 -0.165203 0.255121 0.92667
1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16
Weights 0.03672 -0.023802 0.024679 -0.043643 0.082272 -0.09428 0.024232 -0.05705 0.121596 0.059467 0.070038 -0.259212 -0.026325 -0.206752 0.139149 1.132069
1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16
Weights 0.005645 0.033188 -0.02197 -0.026689 0.076677 -0.0282 -0.140814 -0.027233 0.172038 0.208582 -0.021579 -0.271482 -0.179426 -0.223632 0.340568 1.074303
1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16
Weights 0.043205 -0.040017 -0.017483 -0.011273 0.051933 0.027188 -0.01083 -0.05432 0.156006 -0.017407 -0.063194 -0.345798 0.14493 -0.084139 0.366779 0.830025
1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16
Weights 0.02475 -0.054648 0.022512 0.019016 -0.048954 0.107099 0.01234 -0.064871 -0.009785 0.068299 0.041227 -0.291874 0.155494 -0.381337 0.447692 0.920975
1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16
Weights 0.042895 -0.077085 0.0252 -0.0774 0.111396 0.054346 -0.016307 -0.033054 0.061895 -0.044473 -0.014237 -0.234032 0.131611 -0.159781 0.362721 0.83958
1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16
Weights -0.004583 -0.038724 0.042599 0.015793 0.078121 0.014992 -0.044123 -0.072932 0.085949 -0.073951 -0.040888 -0.252367 0.206804 -0.031579 0.393959 0.706907
1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16
Weights 0.03292 -0.014045 -0.059117 0.040053 0.012884 -0.002795 0.052948 0.016656 -0.004769 0.039779 -0.096845 -0.168543 0.013607 -0.054306 0.196196 0.963588
1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16
Weights -0.101647 0.006351 -0.008289 -0.031245 0.250694 0.04265 0.056049 -0.186421 0.064713 -0.117066 -0.092173 -0.276994 0.302986 -0.079204 0.19661 0.949528
1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16
Weights 0.057501 -0.110061 0.089932 -0.124474 0.12565 0.063848 -0.047597 -0.053349 0.150243 0.011036 -0.099392 -0.232904 0.072173 0.047808 0.150916 0.883326
1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16
Weights 0.071435 -0.01506 -0.106093 -0.033295 0.072091 0.134994 -0.032036 -0.106253 0.00648 0.13399 0.009557 -0.268246 0.055638 -0.093443 0.14382 1.005123
1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16
Weights 0.005302 0.004333 -0.01741 -0.002984 0.025893 0.064066 -0.025406 0.016048 -0.026357 -0.035722 -0.112582 -0.083827 0.013187 0.092293 0.32391 0.733616
1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16
Weights 0.024951 0.014956 0.038899 -0.057175 -0.075061 0.030828 -0.005079 -0.001826 0.16319 0.037909 -0.071843 -0.183555 -0.070556 -0.142963 0.163295 1.103153
1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16
Weights 0.019699 -0.005372 -0.018371 -0.036478 0.090782 -0.020941 0.030946 0.01998 0.043788 -0.051809 -0.058836 -0.215192 0.078944 -0.008083 0.42868 0.673162
1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16
Weights 0.071488 -0.035663 -0.043951 -0.04648 0.055272 0.086468 -0.085787 -0.045725 0.196189 0.05324 -0.123083 -0.217968 -0.099636 -0.041171 0.385628 0.860262
1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16
Weights -0.003505 0.065532 -0.035153 -0.051114 -0.006021 0.086769 0.005833 -0.013693 0.071568 0.029981 -0.16792 -0.198023 0.064692 0.102555 0.160429 0.85757
1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16
Weights 0.05272 -0.033493 -0.030412 -0.031621 0.047681 0.068638 -0.051721 -0.035482 0.147171 0.038493 -0.19883 -0.326242 0.269857 -0.234671 0.473871 0.818985
1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16
Weights 0.020541 -0.055219 0.053443 0.005065 0.053503 -0.024846 -0.004746 -0.039078 0.087784 -0.05784 -0.058423 -0.146166 0.094008 -0.014676 0.413348 0.648798
1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16
Weights 0.018665 -0.010049 0.021514 -0.006677 0.042455 -0.032112 0.024034 -0.066983 0.021612 0.008646 0.176432 -0.262062 0.034473 -0.117531 0.122245 0.99008
1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16
Weights 0.036403 -0.006463 -0.096538 0.026499 0.063928 0.072838 -0.060842 0.015217 0.065704 -0.029951 -0.166761 -0.187337 0.053882 0.052383 0.241433 0.897911
1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16
Weights 0.08171 -0.055172 0.041705 -0.08315 0.032996 -0.02817 -0.011189 0.024175 0.105192 -0.059919 -0.073772 -0.142244 0.090922 -0.065705 0.310383 0.798607
1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16
Weights 0.008002 0.045652 -0.034531 -0.096759 0.073259 0.06736 -0.021757 -0.005692 0.058016 -0.080167 -0.000121 -0.154333 0.066364 -0.08602 0.182071 0.954498
1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16
Weights 0.050504 -0.0359 0.027123 -0.015188 0.012524 0.006431 0.003749 0.007697 0.035433 -0.009706 -0.035974 -0.107502 -0.000235 0.000541 0.340988 0.674029
1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16
Weights 0.046986 -0.026735 -0.013857 -0.005559 0.039919 -0.011972 -0.012505 -0.031916 0.112903 0.002491 0.044546 -0.257833 0.091025 -0.166002 0.166796 0.983336
1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16
Weights 0.057385 -0.125638 0.150826 -0.076465 -0.023869 0.058089 -0.062053 0.06466 0.043397 0.139258 -0.1075 -0.568213 0.519669 -0.237258 0.158484 0.980093
1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16
Weights 0.014655 0.018614 -0.001373 -0.008682 -0.035333 0.020366 0.021685 -0.012226 0.087849 0.067966 -0.145597 -0.158161 -0.05746 0.050889 0.214073 0.891754
1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16
Weights 0.065459 -0.120091 0.10474 -0.045997 0.038065 -0.047799 0.023269 0.021578 0.065904 -0.014742 0.025909 -0.314993 0.158225 -0.093919 0.181161 0.92171
1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16
Weights 0.02747 -0.04505 0.030301 -0.003036 0.021076 -0.004478 -0.02827 -0.00128 0.134406 0.016599 -0.070739 -0.293385 0.150239 -0.269391 0.357771 0.954902
1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16
Weights 0.05019 0.008343 0.035621 -0.155514 0.169412 -0.093933 -0.052599 -0.072744 0.191624 -0.072764 -0.062114 -0.331418 0.251381 0.237876 0.127623 0.745261
1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16
Weights 0.028742 -0.100683 0.119278 -0.073455 0.062661 -0.027847 0.094725 -0.112326 0.065378 -0.047786 0.06401 -0.168085 -0.031281 -0.061843 0.212972 0.954378
1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16
Weights -0.048382 0.031194 0.066517 -0.059964 0.084445 0.050817 -0.137098 -0.052245 0.123822 -0.065138 -0.082074 -0.184314 0.210265 -0.020553 0.213561 0.84802
1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16
Weights 0.014751 0.006493 -0.019664 0.004988 0.028227 0.013304 0.01648 -0.005039 0.039455 -0.137295 0.032405 -0.204857 0.096573 -0.085961 0.434874 0.735476
1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16
Weights -0.019312 -0.017321 0.037063 0.004566 0.013272 0.023405 0.01082 -0.008076 0.104316 0.019944 -0.272234 -0.28205 0.254751 -0.185728 0.589545 0.699324
1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16
Weights 0.003738 -0.044262 0.067875 -0.02351 0.043506 0.038961 -0.101068 -0.086032 0.200006 0.053897 0.022683 -0.363905 0.126564 -0.212171 0.284083 0.961778
[... fitted AR(16) lag weights for the remaining stations omitted; each row lists the 16 lag coefficients of one station's model ...]
Predicting...
Scoring...
t+1 t+2 t+3 t+4 t+5 t+6 t+7 t+8 t+9 t+10 t+11 t+12
R2 0.845500 0.808931 0.750002 0.676203 0.582587 0.492036 0.406664 0.332380 0.267131 0.217029 0.178383 0.148948
RMSE 151.224199 168.171198 192.364313 218.923816 248.564720 274.203313 296.351193 314.355486 329.359004 340.431078 348.731351 354.923317
MSE 22868.758254 28281.551997 37004.029014 47927.637163 61784.419853 75187.457031 87824.029437 98819.371734 108477.353782 115893.318714 121613.555016 125970.560940
MAE 0.608901 0.653870 0.703409 0.750556 0.801738 0.842684 0.880211 0.913559 0.944188 0.967519 0.988271 1.008943
MAPE 60.890095 65.387049 70.340904 75.055579 80.173767 84.268418 88.021095 91.355946 94.418821 96.751931 98.827083 100.894279
MPE -28.302220 -25.649025 -23.240874 -21.610105 -20.590553 -19.851917 -19.535926 -19.486887 -19.314468 -18.560218 -17.445094 -16.470107
CPU times: user 20min 43s, sys: 52.5 s, total: 21min 35s
Wall time: 20min 26s
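The table above follows the usual horizon-wise definitions of these metrics. As a reference, here is a minimal sketch of those definitions; this is an assumption about what `metrics.py` computes, since its implementation is not shown, and the function name is illustrative:

import numpy as np

def horizon_scores(y_true, y_pred):
    # Errors for one horizon t+k; y_true/y_pred are aligned 1-D arrays,
    # and y_true is assumed to contain no zeros (for MAPE/MPE).
    err = y_pred - y_true
    mse = np.mean(err ** 2)
    return {
        "R2": 1 - mse / np.var(y_true),
        "RMSE": np.sqrt(mse),
        "MSE": mse,
        "MAE": np.mean(np.abs(err)),
        "MAPE": 100 * np.mean(np.abs(err) / np.abs(y_true)),
        "MPE": 100 * np.mean(err / y_true),
    }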
In [55]:
fig, ax = plt.subplots(1, figsize=(10, 6))
ax.set_prop_cycle(color=['darkgoldenrod', 'brown'])
x = range(1, limit_t+1)
baseline_score = df_baseline_scores.loc['RMSE', 's'].repeat(limit_t).reshape(-1, limit_t).T
model_score = np.array(ar_scores_s).T[1]

ax = plt.plot(x, model_score, linewidth=3, label="AR per station")
ax = plt.scatter(x, model_score, marker='*', s=100)
ax = plt.plot(x, baseline_score, linewidth=3, label="Baseline per station")
ax = plt.scatter(x, baseline_score, marker='*', s=100)

plt.legend(prop={'size': 20})
plt.title("RMSE of baseline and AR model for each station, from $t+1$ to $t+{}$".format(limit_t), fontsize=16)
plt.xlabel("T plus", fontsize=16); plt.ylabel("RMSE", fontsize=16);
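Each baseline produces a single score rather than one per horizon, so the `repeat(...).reshape(...)` idiom above tiles that score across all horizons and the baseline plots as a flat reference line. A minimal sketch of the idea, with made-up values:

import numpy as np

# One RMSE per baseline variant (values here are made up) ...
baseline_rmse = np.array([150.0, 140.0])
limit_t = 12
# ... tiled so each score repeats at every horizon t+1..t+limit_t:
tiled = baseline_rmse.repeat(limit_t).reshape(-1, limit_t).T
tiled.shape  # (12, 2): one flat column per baseline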

Compute and Compare

In [128]:
fig, ax = plt.subplots(1, figsize=(16, 6))
ax.set_prop_cycle(color=['crimson', 'teal', 'b', 'darkgoldenrod'])
x = range(1, limit_t+1)
baseline_scores = df_baseline_scores.loc['RMSE'].values.repeat(limit_t).reshape(-1, limit_t).T
model_scores = np.vstack((np.array(ar_scores).T[1], np.array(ar_scores_s).T[1])).T
baselineObjects = plt.plot(x, baseline_scores, linewidth=3)
labels = ["Full baseline", "Baseline per station", "Baseline per day",
                    "Baseline per station and day", "Full AR", "AR per station"]
arlineObjects = plt.plot(x, model_scores, linewidth=3)
for i, m in zip(range(4), ['D', '*', '|', 'X']):
    ax = plt.scatter(x, baseline_scores[:, i], marker=m, s=100)

for i, m in zip(range(2), ['D', '*']):
    ax = plt.scatter(x, model_scores[:, i], marker=m, s=100)
plt.legend(baselineObjects+arlineObjects, labels, prop={'size': 15})
plt.title("RMSE of baseline and AR model for each station, from $t+1$ to $t+{}$".format(limit_t), fontsize=16)
plt.xlabel("T plus", fontsize=16); plt.ylabel("RMSE", fontsize=16);

Order=4, T=4

One AR for all

In [129]:
order = 4
limit_t = 4
In [130]:
%%time
ar_scores, ar_preds = ar_plot_results(None, order, limit_t)
Fitting...
1 2 3 4
Weights -0.104382 -0.246324 0.2626 1.050415
Predicting...
Scoring...
t+1 t+2 t+3 t+4
R2 0.958766 0.914209 0.840809 0.752345
RMSE 73.686854 106.287386 144.784028 180.586511
MSE 5429.752456 11297.008338 20962.414756 32611.487942
MAE 0.300858 0.402107 0.488673 0.561052
MAPE 30.085829 40.210705 48.867320 56.105228
MPE 3.006045 7.529172 12.844726 18.821786
CPU times: user 5.59 s, sys: 344 ms, total: 5.94 s
Wall time: 5.51 s
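The four numbers printed under "Weights" are the fitted lag coefficients of the single pooled model. A hypothetical sketch of how multi-step predictions can be obtained from them, assuming the columns are ordered oldest (1) to newest (4) lag, that there is no intercept (the series are min-max normalized), and that forecasting is recursive:

import numpy as np

# Lag weights printed above for the pooled AR(4) model.
w = np.array([-0.104382, -0.246324, 0.2626, 1.050415])

def forecast(history, weights, steps):
    # Feed each prediction back in as the newest lag (recursive strategy).
    window = list(history[-len(weights):])
    preds = []
    for _ in range(steps):
        y_hat = float(np.dot(weights, window))
        preds.append(y_hat)
        window = window[1:] + [y_hat]
    return preds

forecast([0.2, 0.35, 0.5, 0.65], w, steps=4)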
In [131]:
fig, ax = plt.subplots(1, figsize=(10, 6))
ax.set_prop_cycle(color=['darkgoldenrod', 'brown'])
x = range(1, limit_t+1)
baseline_score = df_baseline_scores.loc['RMSE', 'None'].repeat(limit_t).reshape(-1, limit_t).T
model_score = np.array(ar_scores).T[1]

ax = plt.plot(x, model_score, linewidth=3, label="Full AR")
ax = plt.scatter(x, model_score, marker='*', s=100)
ax = plt.plot(x, baseline_score, linewidth=3, label="Full baseline")
ax = plt.scatter(x, baseline_score, marker='*', s=100)

plt.legend(prop={'size': 20})
plt.title("RMSE of full baseline and AR model, from $t+1$ to $t+{}$".format(limit_t), fontsize=16)
plt.xlabel("T plus", fontsize=16); plt.ylabel("RMSE", fontsize=16);

AR per station

In [133]:
order = 4
limit_t = 4
In [134]:
%%time
ar_scores_s, ar_preds_s = ar_plot_results("s", order, limit_t)
Fitting...
1 2 3 4
Weights -0.239011 -0.030744 0.332886 0.875556
[... fitted AR(4) lag weights for the remaining stations omitted; one four-coefficient row per station ...]
Predicting...
Scoring...
t+1 t+2 t+3 t+4
R2 0.846456 0.807080 0.743545 0.663658
RMSE 142.192840 159.386203 183.766874 210.451684
MSE 20218.803625 25403.961577 33770.264135 44289.911248
MAE 0.659805 0.704571 0.747664 0.789678
MAPE 65.980504 70.457098 74.766390 78.967843
MPE -27.859338 -23.176606 -18.163449 -14.027863
CPU times: user 6min 27s, sys: 56.1 s, total: 7min 23s
Wall time: 6min 9s
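The long fitting phase above prints one weight row per station. A sketch of that setup, assuming `ar_plot_results("s", order, limit_t)` fits one independent AR model on each station's own series; `series_by_station` and the function name are illustrative:

import numpy as np
from statsmodels.tsa.ar_model import AR

# series_by_station: hypothetical {station_id: 1-D array} mapping.
def fit_ar_per_station(series_by_station, order=4):
    weights = {}
    for station, series in series_by_station.items():
        fit = AR(np.asarray(series, dtype=float)).fit(maxlag=order)
        weights[station] = fit.params  # may include an intercept, depending on trend
    return weights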
In [135]:
fig, ax = plt.subplots(1, figsize=(10, 6))
ax.set_prop_cycle(color=['darkgoldenrod', 'brown'])
x = range(1, limit_t+1)
baseline_score = df_baseline_scores.loc['RMSE', 's'].repeat(limit_t).reshape(-1, limit_t).T
model_score = np.array(ar_scores_s).T[1]

ax = plt.plot(x, model_score, linewidth=3, label="AR per station")
ax = plt.scatter(x, model_score, marker='*', s=100)
ax = plt.plot(x, baseline_score, linewidth=3, label="Baseline per station")
ax = plt.scatter(x, baseline_score, marker='*', s=100)

plt.legend(prop={'size': 20})
plt.title("RMSE of baseline and AR model for each station, from $t+1$ to $t+{}$".format(limit_t), fontsize=16)
plt.xlabel("T plus", fontsize=16); plt.ylabel("RMSE", fontsize=16);

Compute and Compare

In [136]:
fig, ax = plt.subplots(1, figsize=(16, 6))
ax.set_prop_cycle(color=['crimson', 'teal', 'b', 'darkgoldenrod'])
x = range(1, limit_t+1)
baseline_scores = df_baseline_scores.loc['RMSE'].values.repeat(limit_t).reshape(-1, limit_t).T
model_scores = np.vstack((np.array(ar_scores).T[1], np.array(ar_scores_s).T[1])).T
baselineObjects = plt.plot(x, baseline_scores, linewidth=3)
labels = ["Full baseline", "Baseline per station", "Baseline per day",
                    "Baseline per station and day", "Full AR", "AR per station"]
arlineObjects = plt.plot(x, model_scores, linewidth=3)
for i, m in zip(range(4), ['D', '*', '|', 'X']):
    ax = plt.scatter(x, baseline_scores[:, i], marker=m, s=100)

for i, m in zip(range(2), ['D', '*']):
    ax = plt.scatter(x, model_scores[:, i], marker=m, s=100)
plt.legend(baselineObjects+arlineObjects, labels, prop={'size': 15})
plt.title("RMSE of baseline and AR model for each station, from $t+1$ to $t+{}$".format(limit_t), fontsize=16)
plt.xlabel("T plus", fontsize=16); plt.ylabel("RMSE", fontsize=16);
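Reading the two order-$4$ score tables above: the single pooled AR reaches a one-step RMSE of $73.69$, about half the per-station models' $142.19$ ($73.69 / 142.19 \approx 0.52$), and it is also far cheaper to fit (about $6$ s of CPU time versus more than $6$ min).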

Draft